use std::collections::{HashMap, HashSet};
-use core::{Package, PackageId};
+use core::{Package, PackageId, SourceId};
use core::registry::PackageRegistry;
use core::resolver::{mod, Resolve};
use ops;
let root = package.get_package_id().get_source_id().clone();
try!(registry.add_sources(&[root]));
+ // Here we place an artificial limitation that all non-registry sources
+ // cannot be locked at more than one revision. This means that if a git
+ // repository provides more than one package, they must all be updated in
+ // step when any of them are updated.
+ //
+ // TODO: This seems like a hokey reason to single out the registry as being
+ // different
+ let mut to_avoid_sources = HashSet::new();
+ match to_avoid {
+ Some(set) => {
+ for package_id in set.iter() {
+ let source = package_id.get_source_id();
+ if !source.is_registry() {
+ to_avoid_sources.insert(source);
+ }
+ }
+ }
+ None => {}
+ }
+
let summary = package.get_summary().clone();
let summary = match previous {
Some(r) => {
// ranges. To deal with this, we only actually lock a dependency
// to the previously resolved version if the dependency listed
// still matches the locked version.
- for node in r.iter().filter(|p| keep(p, to_avoid)) {
+ for node in r.iter().filter(|p| keep(p, to_avoid, &to_avoid_sources)) {
let deps = r.deps(node).into_iter().flat_map(|i| i)
- .filter(|p| keep(p, to_avoid))
+ .filter(|p| keep(p, to_avoid, &to_avoid_sources))
.map(|p| p.clone()).collect();
registry.register_lock(node.clone(), deps);
}
let map = r.deps(r.root()).into_iter().flat_map(|i| i).filter(|p| {
- keep(p, to_avoid)
+ keep(p, to_avoid, &to_avoid_sources)
}).map(|d| {
(d.get_name(), d)
}).collect::<HashMap<_, _>>();
}
return Ok(resolved);
- fn keep<'a>(p: &&'a PackageId, to_avoid: Option<&HashSet<&'a PackageId>>)
+ /// Decides whether a previously-resolved `PackageId` is carried forward
+ /// into the new resolution. A package survives only when (a) its source is
+ /// not slated for wholesale re-resolution (`to_avoid_sources` holds the
+ /// non-registry sources of every package being updated) and (b) it is not
+ /// individually listed in `to_avoid_packages`.
+ fn keep<'a>(p: &&'a PackageId,
+             to_avoid_packages: Option<&HashSet<&'a PackageId>>,
+             to_avoid_sources: &HashSet<&'a SourceId>)
            -> bool {
- match to_avoid {
+ // Source-level avoidance is checked first and short-circuits the
+ // package-level check; `None` means no packages were singled out.
+ !to_avoid_sources.contains(&p.get_source_id()) && match to_avoid_packages {
     Some(set) => !set.contains(p),
     None => true,
 }
{compiling} project [..]
", dep2.url(), compiling = COMPILING)));
})
+
+// Regression test for source-level locking: non-registry (e.g. git) sources
+// are locked at a single revision, so running `cargo update -p dep` must move
+// every package living in that git repository ("dep" and its path dependency
+// "a") forward to the new revision together.
+test!(update_one_source_updates_all_packages_in_that_git_source {
+    // A git source providing two packages: `dep` plus its path dependency `a`.
+    let dep = git_repo("dep", |project| {
+        project.file("Cargo.toml", r#"
+            [package]
+            name = "dep"
+            version = "0.5.0"
+            authors = []
+
+            [dependencies.a]
+            path = "a"
+        "#)
+        .file("src/lib.rs", "")
+        .file("a/Cargo.toml", r#"
+            [package]
+            name = "a"
+            version = "0.5.0"
+            authors = []
+        "#)
+        .file("a/src/lib.rs", "")
+    }).assert();
+
+    let p = project("project")
+        .file("Cargo.toml", format!(r#"
+            [project]
+            name = "project"
+            version = "0.5.0"
+            authors = []
+            [dependencies.dep]
+            git = '{}'
+        "#, dep.url()).as_slice())
+        .file("src/main.rs", "fn main() {}");
+
+    // Initial build produces a lockfile pinned to the first git revision.
+    p.build();
+    assert_that(p.process(cargo_dir().join("cargo")).arg("build"),
+                execs().with_status(0));
+
+    // Capture the revision currently recorded in the lockfile.
+    let repo = git2::Repository::open(&dep.root()).unwrap();
+    let rev1 = repo.revparse_single("HEAD").unwrap().id();
+
+    // Just be sure to change a file
+    File::create(&dep.root().join("src/lib.rs")).write_str(r#"
+        pub fn bar() -> int { 2 }
+    "#).assert();
+    add(&repo);
+    commit(&repo);
+
+    // Updating only `dep` must re-lock the whole git source, so the old
+    // revision should no longer appear anywhere in the lockfile.
+    assert_that(p.process(cargo_dir().join("cargo")).arg("update")
+                 .arg("-p").arg("dep"),
+                execs().with_status(0));
+    let lockfile = File::open(&p.root().join("Cargo.lock")).read_to_string()
+                        .unwrap();
+    assert!(!lockfile.as_slice().contains(rev1.to_string().as_slice()),
+            "{} in {}", rev1, lockfile);
+})